# bootstrap-based MDD test at three levels
#btMDD=(mdtBoot$p.value<=levls)*1
#names(btMDD)=c("btMDD_0.10","btMDD_0.05","btMDD_0.01")
#c(pChi,btMDD,tms)
pChi
}
#=========================================================================================>
# Illustration: single-replication sanity checks of MCfun on the j-th dataset.
# wmat.mammen builds a Mammen wild-bootstrap weight matrix (from estrpac);
# datl1a_size / datl1a_pow are produced by gdat1 elsewhere in this history.
wmat1=wmat.mammen(nvec[1],B)
MCfun(j=10,datl=datl1a_size,wmat = wmat1,Kern = "Gauss")
MCfun(j=10,datl=datl1a_pow,wmat = wmat1)
# Empirical size study (serial sapply version): gam = 0 imposes the null, so
# row means of MCOBJ_1[[l]] estimate rejection rates at each nominal level.
MCOBJ_1=list()
# ------------------------- Compute Size
for (l in 1:length(nvec)) {
datl_1 =gdat1(n=nvec[l],gam = 0.0); wmat=wmat.mammen(nvec[l],B)
MCOBJ_1[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss") # rows = nominal levels, cols = replications
#MCOBJ_1[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
cat("At n = ",nvec[l],"\n")
print(apply(MCOBJ_1[[l]],1,mean)); cat("\n") # empirical size per level
#print(apply(MCOBJ_1[[l]],1,median))
}
# Same size study as above, but run in parallel with pbsapply (4 workers).
# NOTE: this re-initializes MCOBJ_1 and overwrites the serial results.
MCOBJ_1=list()
# ------------------------- Compute Size
for (l in 1:length(nvec)) {
datl_1 =gdat1(n=nvec[l],gam = 0.0); wmat=wmat.mammen(nvec[l],B)
#MCOBJ_1[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
MCOBJ_1[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
cat("At n = ",nvec[l],"\n")
print(apply(MCOBJ_1[[l]],1,mean)); cat("\n") # empirical size per level
#print(apply(MCOBJ_1[[l]],1,median))
}
# print results: rounded empirical rejection rates for each sample size
for (l in 1:length(nvec)) {
cat("At n = ",nvec[l],"\n")
print(round(apply(MCOBJ_1[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_1[[l]],1,median),3))
}
#=========================================================================================>
# *** Compute Power Curve Using Design 1 above ***
# Power at fixed n = nvec[2], traced over the deviations in gamvec.
# NOTE(review): wmat is reused from the size loop above; it is a required
# MCfun argument but the pivotal test itself does not appear to use it.
MCOBJ_2=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat1(n=nvec[2],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
MCOBJ_2[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
#MCOBJ_2[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_2[[l]],1,mean)); cat("\n") # empirical power per level
#print(apply(MCOBJ_2[[l]],1,median))
}
# print results: rounded empirical power along the MCOBJ_2 curve.
# fix: the loop iterates over gamvec, so label the printed value "gam"
# (the original printed "At n = " followed by a gamma value).
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_2[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_2[[l]],1,median),3))
}
#=========================================================================================>
# *** Compute Power Curve under local alternatives ***
# Generate R Monte Carlo datasets under local (1/sqrt(n)) quadratic
# deviations with an exponentiated outcome equation.
#   n:   sample size per dataset
#   gam: deviation from the null; gam = 0 recovers H0
# Depends on globals R (replication count), Sig, SigU and MASS::mvrnorm.
# Fixed seeds per replication keep every dataset reproducible.
gdat3 <- function(n, gam = 0.0) {
  lapply(seq_len(R), function(l) {
    set.seed(l + 10)
    U <- mvrnorm(n, rep(0, 2), SigU)
    set.seed(l + 20)
    Z <- mvrnorm(n, rep(0, 2), Sig)
    X <- (Z[, 1] + U[, 2]) / sqrt(2) # endogenous regressor
    Y <- exp(X + Z[, 2] + 5 * abs(gam) * Z[, 1]^2 / sqrt(n) +
               U[, 1] / sqrt(1 + Z[, 2]^2))
    list(Y = Y, X = cbind(X, Z[, 2]), Z = Z)
  })
}
# Power under local alternatives (gdat3) at fixed n = nvec[2].
MCOBJ_3=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat3(n=nvec[2],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
MCOBJ_3[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
#MCOBJ_3[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_3[[l]],1,mean)); cat("\n") # empirical power per level
print(apply(MCOBJ_3[[l]],1,median))
}
# print results: rounded empirical power along the MCOBJ_3 curve
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_3[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_3[[l]],1,median),3))
}
#=================================================>
# Increase the sample size to 800
# Same local-alternative design as MCOBJ_3 but at n = nvec[4] (= 800).
MCOBJ_3B=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat3(n=nvec[4],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
MCOBJ_3B[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
#MCOBJ_3B[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_3B[[l]],1,mean)); cat("\n") # empirical power per level
#print(apply(MCOBJ_3B[[l]],1,median))
}
# print results: rounded empirical power for the n = 800 run (MCOBJ_3B)
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_3B[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_3B[[l]],1,median),3))
}
#=========================================================================================>
# *** Compute Power Curve under high frequency alternatives ***
# Generate R Monte Carlo datasets under high-frequency (sinusoidal)
# alternatives with an exponentiated outcome equation.
#   n:   sample size per dataset
#   gam: deviation from the null; gam = 0 recovers H0
# Depends on globals R (replication count), Sig, SigU and MASS::mvrnorm.
# Fixed seeds per replication keep every dataset reproducible.
gdat4 <- function(n, gam = 0.0) {
  lapply(seq_len(R), function(l) {
    set.seed(l + 10)
    U <- mvrnorm(n, rep(0, 2), SigU)
    set.seed(l + 20)
    Z <- mvrnorm(n, rep(0, 2), Sig)
    X <- (Z[, 1] + U[, 2]) / sqrt(2) # endogenous regressor
    Y <- exp(X + Z[, 2] + abs(gam) * sin(2 * Z[, 1]) / sqrt((1 + exp(-8)) / 2) +
               U[, 1] / sqrt(1 + Z[, 2]^2))
    list(Y = Y, X = cbind(X, Z[, 2]), Z = Z)
  })
}
# Power under high-frequency alternatives (gdat4) at fixed n = nvec[2].
MCOBJ_4=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat4(n=nvec[2],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
# fix: pass Kern = "Gauss" explicitly -- every sibling loop (MCOBJ_2/3/3B)
# uses the Gaussian kernel, and omitting it here silently fell back to the
# "Euclid" default of MCfun (the commented serial line below shows the intent)
MCOBJ_4[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
#MCOBJ_4[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_4[[l]],1,mean)); cat("\n") # empirical power per level
#print(apply(MCOBJ_4[[l]],1,median))
}
# print results: rounded empirical power along the MCOBJ_4 curve
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_4[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_4[[l]],1,median),3))
}
# WARNING: wipes the entire workspace; marks the end of this history segment
rm(list=ls())
#=========================================================================================>
# Author: Feiyu Jiang & Emmanuel Selorm Tsyawo
# Project: A Chi-squared test of Mean Independence
# Date began:   Feb 03, 2022
# Last Update:  March 10, 2022
# Place:        Rabat & ...
#=========================================================================================>
# Load packages, clear memory
#rm(list=ls())
library(MASS)
library(pbapply)
library(estrpac) #install from github using devtools::install_github("estsyawo/estrpac")
library(ivreg)
#list.files()
#=========================================================================================>
# Comments: Computes the size and power of a pivotal chi-square MDD test of mean independence.
#=========================================================================================>
# Set Parameters
R = 1000 # set number of simulations
B=499 # number of wild bootstrap samples
levls = c(0.10,0.05,0.01) #nominal levels of the test to consider
#nvec=c(50,100,200,400) #sample sizes considered
nvec=c(200,400,600,800) #sample sizes considered
gamvec=seq(-1,1,by=0.2) #tuning the deviation away from the null
(Sig = 0.25^as.matrix(dist(1:2))) #covariance matrix for generating predictors
SigU = diag(2); SigU[2,1]=SigU[1,2]=0.5 # error covariance; off-diagonal 0.5 induces endogeneity
# Pivotal chi-square specification (mean-independence/MDD-type) test for an
# IV regression.
#   rObj: fitted ivreg object; its residuals are the tested quantity
#   X, Z: regressor and instrument matrices (without intercept columns)
#   Kern: kernel name forwarded to Kern.fun (e.g. "Euclid", "Gauss")
# Returns list(chi2, df, p.value) from a Wald test via aod::wald.test.
specIV.Chi<- function(rObj,X,Z,Kern="Euclid"){
Uhat = rObj$residuals; n=length(Uhat)
# auxiliary transformations of (Uhat, Z); each column is demeaned below
Vhat = cbind(Uhat+Z[,1],Uhat+Z[,2],Z^2,Z[,1]*Z[,2])
for (j in 1:ncol(Vhat)) {Vhat[,j]=Vhat[,j]-mean(Vhat[,j])}
Vhat=apply(Vhat, 1, mean) #reduce to univariate case
Ker = Kern.fun(Z,Kern = Kern) # n x n kernel matrix in the instruments
# test statistic numerator, normalized by n(n-1)
etahat = t(Vhat)%*%Ker%*%Uhat/(n*(n-1))
# per-observation terms entering the variance estimate below
qZU = 0.5*(c(Uhat)*Ker%*%Vhat+c(Ker%*%c(Uhat))*Vhat)/(n-1)
qZ = Ker%*%Vhat/(n-1)
#introduce constant terms for the intercept
Z=cbind(1,Z);X=cbind(1,X)
# correction for estimated IV coefficients; NOTE: shadows the global R
R = (t(qZ)%*%X/n)%*%solve(t(Z)%*%X/n)
#Omg1 = 4*crossprod(qZU)/n
Omg1 = 4*cov(qZU)
Omg2 = R%*%(crossprod(Uhat*Z)/n)%*%t(R)
Omg12 = 2*(crossprod(qZU,Uhat*Z)/n)%*%t(R)
Omg = Omg1+Omg2-Omg12-t(Omg12) # variance estimate for etahat
wod=aod::wald.test(Sigma = Omg/n,b=etahat,Terms = length(c(etahat)))
# strip names so callers receive bare scalars
chi2 = c(wod$result$chi2[1]);names(chi2)=NULL
df = c(wod$result$chi2[2]);names(df)=NULL
p.value = c(wod$result$chi2[3]);names(p.value)=NULL
# --- return list
list(chi2=chi2,df=df,p.value=p.value)
}
# Run the pivotal chi-square MDD test on the j-th simulated dataset and
# return 0/1 rejection indicators at the nominal levels in the global
# `levls` (0.10, 0.05, 0.01).
#   j:    index of the dataset within datl
#   datl: list of datasets, each with components Y, X, Z
#   wmat: wild-bootstrap weight matrix (kept for the disabled bootstrap test;
#         not used by the pivotal test)
#   Kern: kernel name forwarded to specIV.Chi
MCfun <- function(j = NULL, datl, wmat, Kern = "Euclid") {
  dat <- datl[[j]]
  Y <- dat$Y
  X <- as.matrix(dat$X)
  Z <- as.matrix(dat$Z)
  # 2SLS fit; keep design/response matrices for the specification test
  rObj <- ivreg::ivreg(Y ~ X | Z, x = TRUE, y = TRUE)
  # time the pivotal test (elapsed time was used for comparisons)
  timeT <- system.time(mdtChi <- specIV.Chi(rObj, X, Z, Kern = Kern))
  # 1 = reject at the corresponding nominal level, 0 = fail to reject
  pChi <- (c(mdtChi$p.value) <= levls) * 1
  names(pChi) <- paste0("pivChi_", c("0.10", "0.05", "0.01"))
  pChi
}
#=========================================================================================>
# Specification 1 - compute empirical size
# Generate R Monte Carlo datasets for Design 1 (fixed quadratic deviation).
#   n:   sample size per dataset
#   gam: deviation from the null; gam = 0 recovers H0 (used for size)
# Depends on globals R (replication count), Sig, SigU and MASS::mvrnorm.
# Fixed seeds per replication keep every dataset reproducible.
gdat1 <- function(n, gam = 0.0) {
  lapply(seq_len(R), function(l) {
    set.seed(l + 10)
    U <- mvrnorm(n, rep(0, 2), SigU)
    set.seed(l + 20)
    Z <- mvrnorm(n, rep(0, 2), Sig)
    X <- (Z[, 1] + U[, 2]) / sqrt(2) # endogenous regressor
    Y <- X + Z[, 2] + abs(gam) * Z[, 1]^2 / sqrt(2) +
      U[, 1] / sqrt(1 + Z[, 2]^2)
    list(Y = Y, X = cbind(X, Z[, 2]), Z = Z)
  })
}
# Generate one null (size) and one alternative (power) batch of datasets,
# then run MCfun once on each as a sanity check.
datl1a_size=gdat1(n=nvec[1]);datl1a_pow=gdat1(n=nvec[1],gam = gamvec[1])
#illustration
wmat1=wmat.mammen(nvec[1],B)
MCfun(j=10,datl=datl1a_size,wmat = wmat1,Kern = "Gauss")
MCfun(j=1,datl=datl1a_pow,wmat = wmat1)
# Empirical size study (parallel, 4 workers): gam = 0 imposes the null.
MCOBJ_1=list()
# ------------------------- Compute Size
for (l in 1:length(nvec)) {
datl_1 =gdat1(n=nvec[l],gam = 0.0); wmat=wmat.mammen(nvec[l],B)
#MCOBJ_1[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat)
MCOBJ_1[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
cat("At n = ",nvec[l],"\n")
print(apply(MCOBJ_1[[l]],1,mean)); cat("\n") # empirical size per level
print(apply(MCOBJ_1[[l]],1,median))
}
# print results: rounded empirical rejection rates for each sample size
for (l in 1:length(nvec)) {
cat("At n = ",nvec[l],"\n")
print(round(apply(MCOBJ_1[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_1[[l]],1,median),3))
}
# Power curve for Design 1 at fixed n = nvec[2], traced over gamvec.
# NOTE(review): wmat carries over from the size loop above.
MCOBJ_2=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat1(n=nvec[2],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
MCOBJ_2[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_2[[l]],1,mean)); cat("\n") # empirical power per level
print(apply(MCOBJ_2[[l]],1,median))
}
# print results: rounded empirical power along the MCOBJ_2 curve.
# fix: the loop iterates over gamvec, so label the printed value "gam"
# (the original printed "At n = " followed by a gamma value).
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_2[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_2[[l]],1,median),3))
}
# Generate R Monte Carlo datasets under local (1/sqrt(n)) quadratic
# deviations (linear outcome equation; cf. the earlier exp() variant).
#   n:   sample size per dataset
#   gam: deviation from the null; gam = 0 recovers H0
# Depends on globals R (replication count), Sig, SigU and MASS::mvrnorm.
# Fixed seeds per replication keep every dataset reproducible.
gdat3 <- function(n, gam = 0.0) {
  lapply(seq_len(R), function(l) {
    set.seed(l + 10)
    U <- mvrnorm(n, rep(0, 2), SigU)
    set.seed(l + 20)
    Z <- mvrnorm(n, rep(0, 2), Sig)
    X <- (Z[, 1] + U[, 2]) / sqrt(2) # endogenous regressor
    Y <- X + Z[, 2] + 5 * abs(gam) * Z[, 1]^2 / sqrt(n) +
      U[, 1] / sqrt(1 + Z[, 2]^2)
    list(Y = Y, X = cbind(X, Z[, 2]), Z = Z)
  })
}
# Power under local alternatives (gdat3) at fixed n = nvec[2].
MCOBJ_3=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat3(n=nvec[2],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
MCOBJ_3[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
#MCOBJ_3[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_3[[l]],1,mean)); cat("\n") # empirical power per level
print(apply(MCOBJ_3[[l]],1,median))
}
# print results: rounded empirical power along the MCOBJ_3 curve
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_3[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_3[[l]],1,median),3))
}
# Same local-alternative design at the larger sample size n = nvec[4].
MCOBJ_3B=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat3(n=nvec[4],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
MCOBJ_3B[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
#MCOBJ_3B[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_3B[[l]],1,mean)); cat("\n") # empirical power per level
#print(apply(MCOBJ_3B[[l]],1,median))
}
# print results: rounded empirical power for the larger-n run (MCOBJ_3B)
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_3B[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_3B[[l]],1,median),3))
}
# Generate R Monte Carlo datasets under high-frequency (sinusoidal)
# alternatives (linear outcome equation; cf. the earlier exp() variant).
#   n:   sample size per dataset
#   gam: deviation from the null; gam = 0 recovers H0
# Depends on globals R (replication count), Sig, SigU and MASS::mvrnorm.
# Fixed seeds per replication keep every dataset reproducible.
gdat4 <- function(n, gam = 0.0) {
  lapply(seq_len(R), function(l) {
    set.seed(l + 10)
    U <- mvrnorm(n, rep(0, 2), SigU)
    set.seed(l + 20)
    Z <- mvrnorm(n, rep(0, 2), Sig)
    X <- (Z[, 1] + U[, 2]) / sqrt(2) # endogenous regressor
    Y <- X + Z[, 2] + abs(gam) * sin(2 * Z[, 1]) / sqrt((1 + exp(-8)) / 2) +
      U[, 1] / sqrt(1 + Z[, 2]^2)
    list(Y = Y, X = cbind(X, Z[, 2]), Z = Z)
  })
}
# Power under high-frequency alternatives (gdat4) at fixed n = nvec[2].
MCOBJ_4=list()
# ------------------------- Compute Power Curve
for (l in 1:length(gamvec)) {
datl_1 =gdat4(n=nvec[2],gam = gamvec[l])#; wmat=wmat.mammen(nvec[l],B)
# fix: pass Kern = "Gauss" explicitly -- every sibling loop (MCOBJ_2/3/3B)
# uses the Gaussian kernel, and omitting it here silently fell back to the
# "Euclid" default of MCfun (the commented serial line below shows the intent)
MCOBJ_4[[l]]=pbsapply(1:R,MCfun,datl=datl_1,cl=4,wmat=wmat,Kern = "Gauss")
#MCOBJ_4[[l]]=sapply(1:R,MCfun,datl=datl_1,wmat=wmat,Kern = "Gauss")
cat("At gam = ",gamvec[l],"\n")
print(apply(MCOBJ_4[[l]],1,mean)); cat("\n") # empirical power per level
#print(apply(MCOBJ_4[[l]],1,median))
}
# print results: rounded empirical power along the MCOBJ_4 curve
for (l in 1:length(gamvec)) {
cat("At gam = ",gamvec[l],"\n")
print(round(apply(MCOBJ_4[[l]],1,mean),3));
# cat("\n")
# print(round(apply(MCOBJ_4[[l]],1,median),3))
}
# back-of-envelope: consumption at income 30000 for C(x) = -124.84 + 0.853*x
-124.84 + 0.853*30000
# Linear consumption function C(x) = -124.84 + 0.853*x (slope = marginal
# propensity to consume), evaluated at income 30000 as an illustration.
MPC <- function(x) {
  -124.84 + 0.853 * x
}
MPC(x = 30000)
# Average propensity to consume: C(x)/x = -124.84/x + 0.853, evaluated at
# income 30000 as an illustration.
APC <- function(x) {
  -124.84 / x + 0.853
}
APC(x = 30000)
# Plot the consumption and average-propensity curves side by side,
# then some scratch checks on max() and powers of half-normal draws.
curve(MPC,0,30000)
?curve
curve(MPC,0,30000,xlab = "Income",ylab = "MPC")
par(mfrow=c(1,2))
curve(MPC,0,30000,xlab = "Income",ylab = "MPC")
curve(APC,0,30000,xlab = "Income",ylab = "APC")
par(mfrow=c(1,1))
X=abs(rnorm(1000))
summary(X)
(max(X))^2==max(X^2)
max(X^4)==(max(X^2))^2
-3/2+1
# Scratch: check (near-)zero correlation between sin(X1) and X2*sin(X2)
# for correlated bivariate normals, repeated over fresh draws.
Sig=diag(2); Sig[1,2]=Sig[2,1]=0.5
Sig
X = MASS::mvrnorm(n=1e6,mu=rep(0,2),Sigma = Sig)
dim(X)
# NOTE: the next line errors (misplaced parenthesis gives sin() two args);
# it is corrected on the line after
cor.test(sin(X[,1],X[,2]*sin(X[,2])))
cor.test(sin(X[,1]),X[,2]*sin(X[,2]))
X = MASS::mvrnorm(n=1e6,mu=rep(0,2),Sigma = Sig)
cor.test(sin(X[,1]),X[,2]*sin(X[,2]))
X = MASS::mvrnorm(n=1e6,mu=rep(0,2),Sigma = Sig)
cor.test(sin(X[,1]),X[,2]*sin(X[,2]))
X = MASS::mvrnorm(n=1e6,mu=rep(0,2),Sigma = Sig)
cor.test(sin(X[,1]),X[,2]*sin(X[,2]))
# Scratch arithmetic (interactive console work)
42-7.5
cov
0.5^2
0.5^2*2.5
0.3^2*2.5
sqrt(0.3^2*2.5)
sqrt(0.5^2*2.5)
1/sqrt(2)
# Scratch: step plots, then Gaussian moments of sin via numeric integration
?plot
?plot
plot(x <- sort(rnorm(47)), type = "s", main = "plot(x, type = \"s\")")
plot(x <- sort(rnorm(47)), type = "s", main = "plot(x, type = \"s\")",cex.lab=2, cex.axis=2, cex.main=2)
f1=function(x) sin(x)*dnorm(x) # E[sin(Z)] integrand, Z ~ N(0,1)
integrate(f1,-Inf,Inf)
f2=function(x) (sin(x))^2*dnorm(x) # E[sin(Z)^2] integrand
integrate(f2,-Inf,Inf)
sqrt(0.4323324)
# Scratch: eigen-decomposition of a 3x3 matrix
m=matrix(1:9,3)
m
EM=eigen(m)
EM$values
EM$vectors
# Scratch: inspect package internals; two-sided normal p-value for t = 2.248
# (the correct forms are 2*(1-pnorm(|t|)) and 2*pnorm(-|t|))
estrpac::mddtest.boot
?t.test
t.test
1-2*pnorm(abs(2.248))
2*pnorm(abs(2.248))
2*(1-pnorm(abs(2.248)))
2*pnorm(-abs(2.248))
597.21+94+1050.45
vignette("discreteQ")
discreteQ::discreteQ
discreteQ:::dq_univariate
?stats::weighted.mean
# Scratch: simulate unit-root / random-walk series and test stationarity
# of their first differences; then Bernoulli sampling experiments.
N=200
d=rep(NA,N)
N=200
Y=rep(NA,(N+1)); set.seed(0) ; Y[1]=1+rnorm(1)
N=200
Y=Eps=rep(NA,(N+1)); set.seed(0) ;Eps[1] = rnorm(1);  Y[1]=1+Eps[1]
N=200
Y=Eps=rep(NA,(N+1)); set.seed(0) ;Eps[1] = rnorm(1);  Y[1]=1+Eps[1]
# random-walk errors: Eps is cumulative, so Y is nonstationary
for (t in 2:(N+1)) {Eps[t]=Eps[t-1]+rnorm(1); Y[t] = 1 + Eps[t] }
summary(Y[-1])
summary(diff(Y[-1]))
N=200
Y=rep(NA,(N+1)); set.seed(0); Y[1]=1+rnorm(1)
# deterministic trend variant (no noise after the first draw)
for (t in 2:(N+1)) {Y[t] = 1 + Y[t-1] }
summary(Y[-1])
summary(diff(Y[-1]))
Y=rep(NA,(N+1)); set.seed(0); Y[1]=1+rnorm(1)
# random walk with drift
for (t in 2:(N+1)) {Y[t] = 1 + Y[t-1] +rnorm(1) }
summary(Y[-1])
summary(diff(Y[-1]))
tseries::adf.test(diff(Y[-1]))
Box.test(diff(Y[-1]))
Box.test(Y[-1])
sample(c(0,1),20,replace = T,prob = c(0.2,0.8))
sample(c(1,0),20,replace = T,prob = c(0.2,0.8))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
mean(sample(c(0,1),20,replace = T,prob = c(0.2,0.8)))
#QTT
#Simulations
# Scratch: dependence tests between transformed regressors
n = 100
n=1000
X = rnorm(n)
U = X^2
Z = rchisq(n,df=1)
Xt = X/(1+Z)
summary(data.frame(U,X,Xt,Z))
U = scale(X^2)
summary(data.frame(U,X,Xt,Z))
Ut = U/(1+Z)
energy::dcov.test(Xt,Z,R=999)
estrpac::mddtest.boot(Ut,cbind(Xt,Z),B=999)
length(seq(0.05,0.95,by=0.1))
length(seq(0.05,0.95,by=0.01))
#This file produces curves of the upper bound on the measures of mean-dependence
# using both the Gaussian and the MMD kernels.
# NOTE: rstudioapi-based setwd only works inside an RStudio session
setwd(dirname(rstudioapi::getActiveDocumentContext()$path)) #set working directory path to source file
#load packages
library(estrpac)
library(MASS)
library(pbapply)
# Standard-deviation bound curve under the Gaussian kernel as a function of
# the instrument dimension p: sqrt(5^(-p/2) - 3^(-p)).
fGauss <- function(p) {
  sqrt(5^(-p / 2) - 3^(-p))
}
# Standard-deviation bound curve under the MMD (Euclidean-distance) kernel
# as a function of the instrument dimension p.
fMMD <- function(p) {
  ratio <- gamma((p + 1) / 2) / gamma(p / 2)
  sqrt(2 * p - 4 * ratio^2)
}
# Standard-deviation bound curve under the DL kernel as a function of the
# instrument dimension p: sqrt(2^(-p) - (2/3)^(2p)).
fDL <- function(p) {
  sqrt(2^(-p) - (2 / 3)^(2 * p))
}
#This file produces curves of the upper bound on the measures of mean-dependence
# using both the Gaussian and the MMD kernels.
# (duplicate re-run of the setup block above, as recorded in this history)
setwd(dirname(rstudioapi::getActiveDocumentContext()$path)) #set working directory path to source file
#load packages
library(estrpac)
library(MASS)
library(pbapply)
fGauss=function(p) sqrt(5^(-p/2) - 3^(-p)) # Gaussian-kernel sd bound
fMMD = function(p) sqrt(2*p-4*(gamma((p+1)/2)/gamma(p/2))^2) # MMD-kernel sd bound
fDL = function(p) sqrt(2^(-p) - (2/3)^(2*p)) # DL-kernel sd bound
pmax=30 # largest instrument dimension plotted
# NOTE(review): pdf() is opened here but no dev.off() is visible in this
# chunk -- presumably it appears later in the file; confirm the device closes
pdf("Fig_Kernel_Var.pdf")
curve(fMMD,1,pmax,ylim=c(-0.1,1.1),xlab = latex2exp::TeX("$p_z$"),ylab = "Standard Deviation")
curve(fGauss,1,pmax,add = T,lty=2,col="blue")
curve(fDL,1,pmax,add = T,lty=3,col="darkgreen")
# mESC6 must exist before this line runs (fit generated later in the file)
lines(1:pmax,mESC6$fitted.values,lty = 4,col=4) #generated below
abline(h=0,lty=4,col="red") #the zero lower bound on variance
# WARNING: clears the workspace (twice, as recorded), then reloads packages
# and redefines the kernel sd-bound curves
rm(list = ls())
rm(list = ls()) #clear memory
#load packages
library(estrpac)
library(MASS)
library(pbapply)
fGauss=function(p) sqrt(5^(-p/2) - 3^(-p)) # Gaussian-kernel sd bound
fMMD = function(p) sqrt(2*p-4*(gamma((p+1)/2)/gamma(p/2))^2) # MMD-kernel sd bound
fDL = function(p) sqrt(2^(-p) - (2/3)^(2*p)) # DL-kernel sd bound
pmax=30 # largest instrument dimension considered
# data:  MM[, 1] * sin(MM[, 1]) * sin(MM[, 2])
# t = 0.53675, df = 1e+06, p-value = 0.5914
# alternative hypothesis: true mean is not equal to 0
# 95 percent confidence interval:
#   -0.0008137986  0.0014276356
# sample estimates:
#   mean of x
# 0.0003069185
#--------------------------------------------------------->
#compute the variance of the ESC6 kernel using simulation
# Simulate the average standard deviation of the ESC6 kernel matrix entries
# for pz-dimensional standard-normal instruments.
#   pz: instrument dimension
# Returns the mean of the entrywise (off-diagonal) standard deviations.
# Relies on MASS::mvrnorm and Kern.fun_Esc6 (from estrpac).
fnsimESC6=function(pz){
# NOTE: local n, R (250, 500) deliberately shadow any globals of these names
n=250; R=500; Kern_array = array(NA,dim = c(n,n,R))
for(l in 1:R){
set.seed(l) # reproducible draw per replication
Z=mvrnorm(n=n,mu=rep(0,pz),Sigma = diag(pz))
Kern_array[,,l]=Kern.fun_Esc6(Z)
}
# sd of each (i,j) kernel entry across the R replications; symmetric fill
sdMat = matrix(0,n,n)
for(i in 1:n){
for (j in 1:i) {
if(i!=j){
sdMat[i,j] = sd(c(Kern_array[i,j,]))
sdMat[j,i]=sdMat[i,j]
}#ensure zeroes on diagonal
}
}
# NOTE(review): the mean includes the n zero diagonal entries, which slightly
# shrinks the average -- confirm this is intended
mean((c(sdMat)))
}
# Vectorize over pz so the function accepts a vector of dimensions
fnsimESC6=Vectorize(fnsimESC6)
#illustration
system.time((tsE=fnsimESC6(pz=8)))
# simulated sd curve over dimensions 1..pmax (parallel, 4 workers)
vsdESC6 = pbsapply(1:pmax,fnsimESC6,cl=4)
